This rebases the lockfile branch onto master and updates all code necessary to
get the tests passing again.
bit versions of cargo on unix you would use:
```
-$ ./configure --target i686-unknown-linux-gnu,x86_64-unknown-linux-gnu
+$ ./configure --target=i686-unknown-linux-gnu,x86_64-unknown-linux-gnu
```
## Contributing to the Docs
-#![crate_name="cargo-generate-lockfile"]
#![feature(phase)]
-extern crate cargo;
-
-#[phase(plugin, link)]
-extern crate hammer;
-
-#[phase(plugin, link)]
-extern crate log;
-
extern crate serialize;
+extern crate cargo;
+extern crate docopt;
+#[phase(plugin)] extern crate docopt_macros;
+#[phase(plugin, link)] extern crate log;
use std::os;
use cargo::ops;
use cargo::{execute_main_without_stdin};
use cargo::core::MultiShell;
use cargo::util::{CliResult, CliError};
-use cargo::util::important_paths::find_project_manifest;
+use cargo::util::important_paths::find_root_manifest_for_cwd;
-#[deriving(PartialEq,Clone,Decodable,Encodable)]
-pub struct Options {
- manifest_path: Option<String>
-}
+docopt!(Options, "
+Generate the lockfile for a project
+
+Usage:
+ cargo-generate-lockfile [options]
-hammer_config!(Options)
+Options:
+ -h, --help Print this message
+ --manifest-path PATH Path to the manifest to compile
+ -v, --verbose Use verbose output
+
+All of the trailing arguments are passed as to the binary to run.
+", flag_manifest_path: Option<String>)
fn main() {
- execute_main_without_stdin(execute);
+ execute_main_without_stdin(execute, false);
}
fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
debug!("executing; cmd=cargo-clean; args={}", os::args());
-
- let root = match options.manifest_path {
- Some(path) => Path::new(path),
- None => try!(find_project_manifest(&os::getcwd(), "Cargo.toml")
- .map_err(|_| {
- CliError::new("Could not find Cargo.toml in this \
- directory or any parent directory",
- 102)
- }))
- };
+ shell.set_verbose(options.flag_verbose);
+ let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
ops::generate_lockfile(&root, shell, true)
.map(|_| None).map_err(|err| CliError::from_boxed(err, 101))
}
fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
- let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
shell.set_verbose(options.flag_verbose);
+ let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
let mut compile_opts = ops::CompileOptions {
update: options.flag_update_remotes,
name: String,
source_id: SourceId,
req: VersionReq,
- transitive: bool
+ transitive: bool,
+ only_match_name: bool,
}
impl Dependency {
name: name.to_string(),
source_id: source_id.clone(),
req: version,
- transitive: true
+ transitive: true,
+ only_match_name: false,
})
}
+ pub fn new_override(name: &str, source_id: &SourceId) -> Dependency {
+ Dependency {
+ name: name.to_string(),
+ source_id: source_id.clone(),
+ req: VersionReq::any(),
+ transitive: true,
+ only_match_name: true,
+ }
+ }
+
pub fn get_version_req(&self) -> &VersionReq {
&self.req
}
debug!(" a={}; b={}", self.source_id, sum.get_source_id());
self.name.as_slice() == sum.get_name() &&
- self.req.matches(sum.get_version()) &&
- &self.source_id == sum.get_source_id()
+ (self.only_match_name || (self.req.matches(sum.get_version()) &&
+ &self.source_id == sum.get_source_id()))
}
}
PackageId,
Registry,
Target,
- Summary
+ Summary,
};
use core::dependency::SerializedDependency;
use util::{CargoResult, graph};
-use std::vec::Vec;
use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId, Package};
use util::{CargoResult, ChainError, Config, human};
impl Registry for Vec<Summary> {
fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
- debug!("querying, summaries={}",
+ debug!("querying for {}, summaries={}", dep,
self.iter().map(|s| s.get_package_id().to_string()).collect::<Vec<String>>());
- Ok(self.iter()
- .filter(|summary| dep.matches(*summary))
- .map(|summary| summary.clone())
- .collect())
+ Ok(self.iter().filter(|summary| dep.matches(*summary))
+ .map(|summary| summary.clone()).collect())
}
}
pub struct PackageRegistry<'a> {
sources: SourceMap,
- overrides: Vec<Summary>,
- summaries: Vec<Summary>,
+ overrides: Vec<SourceId>,
config: &'a mut Config<'a>
}
impl<'a> PackageRegistry<'a> {
pub fn new<'a>(source_ids: Vec<SourceId>,
- override_ids: Vec<SourceId>,
- config: &'a mut Config<'a>) -> CargoResult<PackageRegistry<'a>> {
+ config: &'a mut Config<'a>) -> CargoResult<PackageRegistry<'a>> {
let mut reg = PackageRegistry::empty(config);
let source_ids = dedup(source_ids);
try!(reg.load(id, false));
}
- for id in override_ids.iter() {
- try!(reg.load(id, true));
- }
-
Ok(reg)
}
PackageRegistry {
sources: SourceMap::new(),
overrides: vec!(),
- summaries: vec!(),
config: config
}
}
// TODO: Return earlier if fail
assert!(package_ids.len() == ret.len(),
- "could not get packages from registry; ids={}", package_ids);
+ "could not get packages from registry; ids={}; ret={}",
+ package_ids, ret);
Ok(ret)
}
}
fn ensure_loaded(&mut self, namespace: &SourceId) -> CargoResult<()> {
- if self.sources.contains(namespace) {
- return Ok(());
- }
+ if self.sources.contains(namespace) { return Ok(()); }
try!(self.load(namespace, false));
Ok(())
}
- fn ensure_loaded(&mut self, source_id: &SourceId) -> CargoResult<()> {
- if self.searched.contains(source_id) { return Ok(()); }
- try!(self.load(source_id, false));
+ pub fn add_overrides(&mut self, ids: Vec<SourceId>) -> CargoResult<()> {
+ for id in ids.iter() {
+ try!(self.load(id, true));
+ }
Ok(())
}
fn load(&mut self, source_id: &SourceId, override: bool) -> CargoResult<()> {
(|| {
let mut source = source_id.load(self.config);
- let dst = if override {&mut self.overrides} else {&mut self.summaries};
// Ensure the source has fetched all necessary remote data.
try!(source.update());
- // Get the summaries
- for summary in (try!(source.list())).iter() {
- assert!(!dst.contains(summary), "duplicate summaries: {}", summary);
- dst.push(summary.clone());
- // self.summaries.push(summary.clone());
+ if override {
+ self.overrides.push(source_id.clone());
}
// Save off the source
- self.sources.insert(namespace, source);
-
- // Track that the source has been searched
- self.searched.push(source_id.clone());
+ self.sources.insert(source_id, source);
Ok(())
}).chain_error(|| human(format!("Unable to update {}", source_id)))
}
- fn query_overrides(&self, dep: &Dependency) -> Vec<Summary> {
- self.overrides.iter()
- .filter(|s| s.get_name() == dep.get_name())
- .map(|s| s.clone())
- .collect()
+ fn query_overrides(&mut self, dep: &Dependency)
+ -> CargoResult<Vec<Summary>> {
+ let mut ret = Vec::new();
+ for s in self.overrides.iter() {
+ let src = self.sources.get_mut(s).unwrap();
+ let dep = Dependency::new_override(dep.get_name(), s);
+ ret.push_all_move(try!(src.query(&dep)));
+ }
+ Ok(ret)
}
}
impl<'a> Registry for PackageRegistry<'a> {
fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
- let overrides = self.query_overrides(dep);
+ let overrides = try!(self.query_overrides(dep));
- if overrides.is_empty() {
+ if overrides.len() == 0 {
// Ensure the requested source_id is loaded
try!(self.ensure_loaded(dep.get_source_id()));
- self.summaries.query(dep)
+ let mut ret = Vec::new();
+ for src in self.sources.sources_mut() {
+ ret.push_all_move(try!(src.query(dep)));
+ }
+ Ok(ret)
} else {
Ok(overrides)
}
use std::collections::HashMap;
use std::fmt;
-use serialize::{Encodable, Encoder};
+
use serialize::{Encodable, Encoder, Decodable, Decoder};
use util::graph::{Nodes,Edges};
#[deriving(Encodable, Decodable, Show)]
pub struct EncodableResolve {
- package: Vec<EncodableDependency>,
+ package: Option<Vec<EncodableDependency>>,
root: EncodableDependency
}
pub fn to_resolve(&self, default: &SourceId) -> CargoResult<Resolve> {
let mut g = Graph::new();
- add_pkg_to_graph(&mut g, &self.root, default);
+ try!(add_pkg_to_graph(&mut g, &self.root, default));
- for dep in self.package.iter() {
- add_pkg_to_graph(&mut g, dep, default);
+ match self.package {
+ Some(ref packages) => {
+ for dep in packages.iter() {
+ try!(add_pkg_to_graph(&mut g, dep, default));
+ }
+ }
+ None => {}
}
let root = self.root.to_package_id(default);
impl<E, D: Decoder<E>> Decodable<D, E> for EncodablePackageId {
fn decode(d: &mut D) -> Result<EncodablePackageId, E> {
let string: String = raw_try!(Decodable::decode(d));
- let regex = regex!(r"^([^ ]+) ([^ ]+) (?:\(([^\)]+)\))?$");
+ let regex = regex!(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$");
let captures = regex.captures(string.as_slice()).expect("invalid serialized PackageId");
let name = captures.at(1);
let encodable = ids.iter().filter_map(|&id| {
if self.root == *id { return None; }
- Some(encodable_resolve_node(id, &self.graph))
+ Some(encodable_resolve_node(id, &self.root, &self.graph))
}).collect::<Vec<EncodableDependency>>();
EncodableResolve {
- package: encodable,
- root: encodable_resolve_node(&self.root, &self.graph)
+ package: Some(encodable),
+ root: encodable_resolve_node(&self.root, &self.root, &self.graph)
}.encode(s)
}
}
-fn encodable_resolve_node(id: &PackageId, graph: &Graph<PackageId>) -> EncodableDependency {
+fn encodable_resolve_node(id: &PackageId, root: &PackageId,
+ graph: &Graph<PackageId>) -> EncodableDependency {
let deps = graph.edges(id).map(|edge| {
let mut deps = edge.map(|e| {
- encodable_package_id(e)
+ encodable_package_id(e, root)
}).collect::<Vec<EncodablePackageId>>();
deps.sort();
deps
});
+ let source = if id.get_source_id() == root.get_source_id() {
+ None
+ } else {
+ Some(id.get_source_id().clone())
+ };
EncodableDependency {
name: id.get_name().to_string(),
version: id.get_version().to_string(),
- source: Some(id.get_source_id().clone()),
+ source: source,
dependencies: deps,
}
}
-fn encodable_package_id(id: &PackageId) -> EncodablePackageId {
+fn encodable_package_id(id: &PackageId, root: &PackageId) -> EncodablePackageId {
+ let source = if id.get_source_id() == root.get_source_id() {
+ None
+ } else {
+ Some(id.get_source_id().clone())
+ };
EncodablePackageId {
name: id.get_name().to_string(),
version: id.get_version().to_string(),
- source: Some(id.get_source_id().clone()),
+ source: source,
}
}
registry: &'a mut R,
resolve: Resolve,
- // Eventually, we will have smarter logic for checking for conflicts in the resolve,
- // but without the registry, conflicts should not exist in practice, so this is just
- // a sanity check.
+ // Eventually, we will have smarter logic for checking for conflicts in the
+ // resolve, but without the registry, conflicts should not exist in
+ // practice, so this is just a sanity check.
seen: HashMap<(String, SourceId), semver::Version>
}
}
}
-pub fn resolve<R: Registry>(root: &PackageId, deps: &[Dependency], registry: &mut R)
- -> CargoResult<Resolve> {
+pub fn resolve<R: Registry>(root: &PackageId, deps: &[Dependency],
+ registry: &mut R) -> CargoResult<Resolve> {
log!(5, "resolve; deps={}", deps);
let mut context = Context::new(registry, root.clone());
fn resolve_deps<'a, R: Registry>(parent: &PackageId,
deps: &[Dependency],
- ctx: &mut Context<'a, R>) -> CargoResult<()> {
+ ctx: &mut Context<'a, R>)
+ -> CargoResult<()> {
if deps.is_empty() {
return Ok(());
}
Version required: {}",
dep.get_name(),
parent.get_name(),
- dep.get_namespace(),
+ dep.get_source_id(),
dep.get_version_req())));
}
single source for a particular package name ({}).", dep)));
}
- let summary = pkgs[0].clone();
+ let summary = &pkgs[0];
let name = summary.get_name().to_string();
let source_id = summary.get_source_id().clone();
let version = summary.get_version().clone();
use core::{Dependency, PackageId, Summary, Registry};
use util::{CargoResult, ToUrl};
- fn resolve<R: Registry>(pkg: &PackageId, deps: &[Dependency], registry: &mut R)
+ fn resolve<R: Registry>(pkg: &PackageId, deps: &[Dependency],
+ registry: &mut R)
-> CargoResult<Vec<PackageId>> {
Ok(try!(super::resolve(pkg, deps, registry)).iter().map(|p| p.clone()).collect())
}
#[test]
pub fn test_resolving_with_same_name() {
- let list = vec!(pkg_loc("foo", "http://first.example.com"),
- pkg_loc("foo", "http://second.example.com"));
+ let list = vec![pkg_loc("foo", "http://first.example.com"),
+ pkg_loc("foo", "http://second.example.com")];
let mut reg = registry(list);
let res = resolve(&pkg_id("root"),
&mut reg);
let mut names = loc_names([("foo", "http://first.example.com"),
- ("foo", "http://second.example.com")]);
+ ("foo", "http://second.example.com")]);
names.push(pkg_id("root"));
+use std::c_str::CString;
+use std::cmp::Ordering;
use std::collections::HashMap;
-use std::collections::hashmap::Values;
-use std::fmt;
+use std::collections::hashmap::{Values, MutEntries};
use std::fmt::{Show, Formatter};
+use std::fmt;
use std::hash;
-use std::c_str::CString;
-use std::cmp::Ordering;
+use std::iter;
+use std::mem;
use serialize::{Decodable, Decoder, Encodable, Encoder};
-use url;
use url::Url;
-use core::{Summary, Package, PackageId};
-use sources::{PathSource, GitSource};
+use core::{Summary, Package, PackageId, Registry, Dependency};
+use sources::{PathSource, GitSource, DummyRegistrySource};
use sources::git;
-use util::{Config, CargoResult, CargoError};
-use util::errors::human;
+use util::{human, Config, CargoResult, CargoError, ToUrl};
/// A Source finds and downloads remote packages based on names and
/// versions.
-pub trait Source {
+pub trait Source: Registry {
/// The update method performs any network operations required to
/// get the entire list of all names, versions and dependencies of
/// packages managed by the Source.
fn update(&mut self) -> CargoResult<()>;
- /// The list method lists all names, versions and dependencies of
- /// packages managed by the source. It assumes that `update` has
- /// already been called and no additional network operations are
- /// required.
- fn list(&self) -> CargoResult<Vec<Summary>>;
-
/// The download method fetches the full package for each name and
/// version specified.
fn download(&self, packages: &[PackageId]) -> CargoResult<()>;
if s.starts_with("file:") {
Ok(Local(Path::new(s.slice_from(5))))
} else {
- Url::parse(s).map(Remote).map_err(|e| {
+ s.to_url().map(Remote).map_err(|e| {
human(format!("invalid url `{}`: `{}", s, e))
})
}
SourceId {
kind: ref kind @ GitKind(..),
location: Remote(ref url),
- precise: None
+ precise: _,
} => {
kind.hash(into);
git::canonicalize_url(url.to_string().as_slice()).hash(into);
pub fn from_url(string: String) -> SourceId {
let mut parts = string.as_slice().splitn('+', 1);
- let kind = parts.nth(0).unwrap();
- let mut url = Url::parse(parts.nth(0).unwrap()).ok().expect("Invalid URL");
+ let kind = parts.next().unwrap();
+ let url = parts.next().unwrap();
match kind {
- "git" => {
- let reference = {
- url.path.query.iter()
- .find(|&&(ref k, ref v)| k.as_slice() == "ref")
- .map(|&(ref k, ref v)| v.to_string())
- .unwrap_or("master".to_string())
- .to_string()
+ "git" if url.starts_with("file:") => {
+ let url = url.slice_from(5);
+ let (url, precise) = match url.rfind('#') {
+ Some(pos) => {
+ (url.slice_to(pos), Some(url.slice_from(pos + 1)))
+ }
+ None => (url, None)
};
-
- url.path.query = url.path.query.iter()
- .filter(|&&(ref k,_)| k.as_slice() != "ref")
- .map(|q| q.clone())
- .collect();
-
- let precise = url.path.fragment.clone();
- url.path.fragment = None;
-
+ let (url, reference) = match url.find_str("?ref=") {
+ Some(pos) => {
+ (url.slice_to(pos), Some(url.slice_from(pos + 5)))
+ }
+ None => (url, None)
+ };
+ let reference = reference.unwrap_or("master");
+ let id = SourceId::new(GitKind(reference.to_string()),
+ Local(Path::new(url)));
+ match precise {
+ Some(p) => id.with_precise(p.to_string()),
+ None => id,
+ }
+ }
+ "git" => {
+ let mut url = url.to_url().unwrap();
+ let mut reference = "master".to_string();
+ let pairs = url.query_pairs().unwrap_or(Vec::new());
+ url.set_query_from_pairs(pairs.iter().filter(|&&(ref k, ref v)| {
+ if k.as_slice() == "ref" {
+ reference = v.clone();
+ false
+ } else {
+ true
+ }
+ }).map(|&(ref a, ref b)| (a.as_slice(), b.as_slice())));
+
+ let precise = mem::replace(&mut url.fragment, None);
SourceId::for_git(&url, reference.as_slice(), precise)
},
+ "registry" => SourceId::for_central(),
+ "path" => SourceId::for_path(&Path::new(url.slice_from(5))),
_ => fail!("Unsupported serialized SourceId")
}
}
pub fn to_url(&self) -> String {
match *self {
- SourceId { kind: PathKind, ref location, .. } => {
- fail!("Path sources are not included in the lockfile, so this is unimplemented");
+ SourceId { kind: PathKind, .. } => {
+ fail!("Path sources are not included in the lockfile, \
+ so this is unimplemented")
},
- SourceId { kind: GitKind(ref reference), ref location, ref precise, .. } => {
+ SourceId {
+ kind: GitKind(ref reference), ref location, ref precise, ..
+ } => {
let ref_str = if reference.as_slice() != "master" {
format!("?ref={}", reference)
} else {
pub fn for_central() -> SourceId {
SourceId::new(RegistryKind,
- Remote(Url::parse("https://example.com").unwrap()))
+ Remote("https://example.com".to_url().unwrap()))
}
pub fn get_location(&self) -> &Location {
};
box PathSource::new(path, self) as Box<Source>
},
- RegistryKind => unimplemented!()
+ RegistryKind => box DummyRegistrySource::new(self) as Box<Source>,
}
}
}
pub type Sources<'a> = Values<'a, SourceId, Box<Source>>;
+pub type SourcesMut<'a> = iter::Map<'static, (&'a SourceId, &'a mut Box<Source>),
+ &'a mut Source,
+ MutEntries<'a, SourceId, Box<Source>>>;
impl SourceMap {
pub fn new() -> SourceMap {
})
}
+ pub fn get_mut(&mut self, id: &SourceId) -> Option<&mut Source> {
+ self.map.find_mut(id).map(|s| {
+ let s: &mut Source = *s;
+ s
+ })
+ }
+
pub fn get_by_package_id(&self, pkg_id: &PackageId) -> Option<&Source> {
self.get(pkg_id.get_source_id())
}
pub fn sources(&self) -> Sources {
self.map.values()
}
+
+ pub fn sources_mut(&mut self) -> SourcesMut {
+ self.map.mut_iter().map(|(_, v)| { let s: &mut Source = *v; s })
+ }
}
pub struct SourceSet {
}
}
-impl Source for SourceSet {
- fn update(&mut self) -> CargoResult<()> {
+impl Registry for SourceSet {
+ fn query(&mut self, name: &Dependency) -> CargoResult<Vec<Summary>> {
+ let mut ret = Vec::new();
+
for source in self.sources.mut_iter() {
- try!(source.update());
+ ret.push_all_move(try!(source.query(name)));
}
- Ok(())
+ Ok(ret)
}
+}
- fn list(&self) -> CargoResult<Vec<Summary>> {
- let mut ret = Vec::new();
-
- for source in self.sources.iter() {
- ret.push_all(try!(source.list()).as_slice());
+impl Source for SourceSet {
+ fn update(&mut self) -> CargoResult<()> {
+ for source in self.sources.mut_iter() {
+ try!(source.update());
}
- Ok(ret)
+ Ok(())
}
fn download(&self, packages: &[PackageId]) -> CargoResult<()> {
use std::collections::HashMap;
use std::io::File;
use serialize::Decodable;
+use rstoml = toml;
use core::registry::PackageRegistry;
use core::{MultiShell, Source, SourceId, PackageSet, Target, PackageId, Resolve, resolver};
manifest_path.dir_path()));
let source_ids = package.get_source_ids();
- let (packages, resolve, sources) = {
+ let (packages, resolve, resolve_with_overrides, sources) = {
let lockfile = manifest_path.dir_path().join("Cargo.lock");
let source_id = package.get_package_id().get_source_id();
let mut config = try!(Config::new(*shell, update, jobs, target.clone()));
- let mut registry =
- try!(PackageRegistry::new(source_ids, override_ids, &mut config));
+ let mut registry = try!(PackageRegistry::new(source_ids, &mut config));
let resolved = match try!(load_lockfile(&lockfile, source_id)) {
Some(r) => r,
}
};
- let req: Vec<PackageId> = resolved.iter().map(|r| r.clone()).collect();
+ try!(registry.add_overrides(override_ids));
+
+ let resolved_with_overrides =
+ try!(resolver::resolve(package.get_package_id(),
+ package.get_dependencies(),
+ &mut registry));
+
+ let req: Vec<PackageId> = resolved_with_overrides.iter().map(|r| {
+ r.clone()
+ }).collect();
let packages = try!(registry.get(req.as_slice()).wrap({
human("Unable to get packages from source")
}));
- (packages, resolved, registry.move_sources())
+ (packages, resolved, resolved_with_overrides, registry.move_sources())
};
debug!("packages={}", packages);
try!(scrape_target_config(&mut config, &user_configs));
try!(ops::compile_targets(env.as_slice(), targets.as_slice(), &package,
- &PackageSet::new(packages.as_slice()), &resolve, &sources, &mut config));
+ &PackageSet::new(packages.as_slice()),
+ &resolve_with_overrides, &sources,
+ &mut config));
}
- try!(ops::generate_lockfile(manifest_path, *shell, false));
+ try!(ops::write_resolve(&package, &resolve));
let test_executables: Vec<String> = targets.iter()
.filter_map(|target| {
let s = try!(f.read_to_string());
- let mut d = ::toml::Decoder::new(::toml::Table(try!(toml::parse(s.as_slice(), path))));
+ let table = rstoml::Table(try!(toml::parse(s.as_slice(), path)));
+ let mut d = rstoml::Decoder::new(table);
let v: resolver::EncodableResolve = Decodable::decode(&mut d).unwrap();
Ok(Some(try!(v.to_resolve(sid))))
}
-use std::collections::TreeMap;
-use std::io::fs::File;
-use serialize::{Encodable, Decodable};
-use toml;
-use toml::{Encoder, Decoder};
+use std::io::File;
+
+use serialize::Encodable;
+use toml::{mod, Encoder};
+
use core::registry::PackageRegistry;
-use core::{MultiShell, Source, Resolve, resolver};
+use core::{MultiShell, Source, Resolve, resolver, Package};
use sources::{PathSource};
use util::config::{Config};
use util::{CargoResult};
let package = try!(source.get_root_package());
debug!("loaded package; package={}", package);
- for key in package.get_manifest().get_unused_keys().iter() {
- try!(shell.warn(format!("unused manifest key: {}", key)));
- }
-
let source_ids = package.get_source_ids();
let resolve = {
let mut config = try!(Config::new(shell, update, None, None));
let mut registry =
- try!(PackageRegistry::new(source_ids, vec![], &mut config));
+ try!(PackageRegistry::new(source_ids, &mut config));
try!(resolver::resolve(package.get_package_id(),
package.get_dependencies(),
&mut registry))
};
- write_resolve(resolve);
+ try!(write_resolve(&package, &resolve));
Ok(())
}
-fn write_resolve(resolve: Resolve) {
+pub fn write_resolve(pkg: &Package, resolve: &Resolve) -> CargoResult<()> {
let mut e = Encoder::new();
resolve.encode(&mut e).unwrap();
- let mut out = String::new();
-
- let root = e.toml.find(&"root".to_string()).unwrap();
-
- out.push_str("[root]\n");
- emit_package(root.as_table().unwrap(), &mut out);
-
- let deps = e.toml.find(&"package".to_string()).unwrap().as_slice().unwrap();
-
- for dep in deps.iter() {
- let dep = dep.as_table().unwrap();
-
- out.push_str("[[package]]\n");
- emit_package(dep, &mut out);
- }
-
- let mut file = File::create(&Path::new("Cargo.lock"));
- write!(file, "{}", out);
-
- let mut d = Decoder::new(toml::Table(e.toml.clone()));
- let v: resolver::EncodableResolve = Decodable::decode(&mut d).unwrap();
+ let out = toml::Table(e.toml).to_string();
+ let loc = pkg.get_root().join("Cargo.lock");
+ try!(File::create(&loc).write_str(out.as_slice()));
-}
-
-fn emit_package(dep: &TreeMap<String, toml::Value>, out: &mut String) {
- out.push_str(format!("name = {}\n", lookup(dep, "name")).as_slice());
- out.push_str(format!("version = {}\n", lookup(dep, "version")).as_slice());
-
- dep.find(&"source".to_string()).map(|s| {
- out.push_str(format!("source = {}\n", lookup(dep, "source")).as_slice());
- });
-
- dep.find(&"dependencies".to_string()).map(|s| {
- let slice = s.as_slice().unwrap();
-
- if !slice.is_empty() {
- out.push_str("dependencies = [\n");
-
- for child in s.as_slice().unwrap().iter() {
- out.push_str(format!(" {},\n", child).as_slice());
- }
-
- out.push_str("]\n");
- }
- out.push_str("\n");
- });
-}
-
-fn lookup<'a>(table: &'a TreeMap<String, toml::Value>, key: &'static str) -> &'a toml::Value {
- table.find(&key.to_string()).expect(format!("Didn't find {}", key).as_slice())
+ Ok(())
}
use core::{Package, Target};
use util;
use util::hex::short_hash;
-use util::{CargoResult, Fresh, Dirty, Freshness};
+use util::{CargoResult, Fresh, Dirty, Freshness, internal, Require};
use super::job::Job;
use super::context::Context;
}
fn get_fingerprint(pkg: &Package, cx: &Context) -> CargoResult<String> {
- let source = cx.sources
- .get(pkg.get_package_id().get_source_id())
- .expect("BUG: Missing package source");
-
+ let id = pkg.get_package_id().get_source_id();
+ let source = try!(cx.sources.get(id).require(|| {
+ internal(format!("Missing package source for: {}", id))
+ }));
source.fingerprint(pkg)
}
pub use self::cargo_run::run;
pub use self::cargo_new::{new, NewOptions};
pub use self::cargo_doc::{doc, DocOptions};
-pub use self::cargo_generate_lockfile::generate_lockfile;
+pub use self::cargo_generate_lockfile::{generate_lockfile, write_resolve};
mod cargo_clean;
mod cargo_compile;
use std::str;
use core::source::{Source, SourceId, GitKind, Location, Remote, Local};
-use core::{Package,PackageId,Summary};
+use core::{Package, PackageId, Summary, Registry, Dependency};
use util::{CargoResult, Config, to_hex};
use sources::PathSource;
use sources::git::utils::{GitReference,GitRemote,Master,Other};
}
}
+impl<'a, 'b> Registry for GitSource<'a, 'b> {
+ fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
+ let src = self.path_source.as_mut()
+ .expect("BUG: update() must be called before query()");
+ src.query(dep)
+ }
+}
+
impl<'a, 'b> Source for GitSource<'a, 'b> {
fn update(&mut self) -> CargoResult<()> {
let should_update = self.config.update_remotes() || {
self.path_source.as_mut().unwrap().update()
}
- fn list(&self) -> CargoResult<Vec<Summary>> {
- self.path_source.as_ref().expect("BUG: update() must be called before list()").list()
- }
-
fn download(&self, _: &[PackageId]) -> CargoResult<()> {
// TODO: assert! that the PackageId is contained by the source
Ok(())
pub use self::path::PathSource;
pub use self::git::GitSource;
+pub use self::registry::DummyRegistrySource;
pub mod path;
pub mod git;
+pub mod registry;
use std::fmt;
use std::io::fs;
-use core::{Package, PackageId, Summary, SourceId, Source};
+use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
use ops;
use util::{CargoResult, internal, internal_error};
}
}
+impl Registry for PathSource {
+ fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
+ let mut summaries: Vec<Summary> = self.packages.iter()
+ .map(|p| p.get_summary().clone())
+ .collect();
+ summaries.query(dep)
+ }
+}
+
impl Source for PathSource {
fn update(&mut self) -> CargoResult<()> {
if !self.updated {
Ok(())
}
- fn list(&self) -> CargoResult<Vec<Summary>> {
- Ok(self.packages.iter()
- .map(|p| p.get_summary().clone())
- .collect())
- }
-
fn download(&self, _: &[PackageId]) -> CargoResult<()>{
// TODO: assert! that the PackageId is contained by the source
Ok(())
let mut max = 0;
for dir in try!(fs::readdir(path)).iter() {
if is_root && dir.filename_str() == Some("target") { continue }
+ if is_root && dir.filename_str() == Some("Cargo.lock") { continue }
max = cmp::max(max, try!(walk(dir, false)));
}
return Ok(max)
--- /dev/null
+use semver::Version;
+
+use core::{Source, SourceId, PackageId, Package, Summary, Registry};
+use core::Dependency;
+use util::CargoResult;
+
+pub struct DummyRegistrySource {
+ id: SourceId,
+}
+
+impl DummyRegistrySource {
+ pub fn new(id: &SourceId) -> DummyRegistrySource {
+ DummyRegistrySource { id: id.clone() }
+ }
+}
+
+impl Registry for DummyRegistrySource {
+ // This is a hack to get tests to pass, this is just a dummy registry.
+ fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
+ let mut version = Version {
+ major: 0, minor: 0, patch: 0,
+ pre: Vec::new(), build: Vec::new(),
+ };
+ for i in range(0, 10) {
+ version.minor = i;
+ if dep.get_version_req().matches(&version) { break }
+ }
+ let pkgid = PackageId::new(dep.get_name().as_slice(),
+ version,
+ &self.id).unwrap();
+ Ok(vec![Summary::new(&pkgid, [])])
+ }
+}
+
+impl Source for DummyRegistrySource {
+ fn update(&mut self) -> CargoResult<()> { Ok(()) }
+ fn download(&self, _packages: &[PackageId]) -> CargoResult<()> { Ok(()) }
+ fn get(&self, _packages: &[PackageId]) -> CargoResult<Vec<Package>> {
+ Ok(Vec::new())
+ }
+ fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
+ unimplemented!()
+ }
+}
use std::hash::Hash;
use std::collections::{HashMap, HashSet};
use std::collections::hashmap::{Keys, SetItems};
-use serialize::Decodable;
pub struct Graph<N> {
nodes: HashMap<N, HashSet<N>>
assert_that(project.cargo_process("cargo-build"),
execs()
.with_stdout(format!("{} git repository `file:{}`\n\
- {} dep1 v0.5.0 (file:{})\n\
+ {} dep1 v0.5.0 (file:{}#[..])\n\
{} foo v0.5.0 (file:{})\n",
UPDATING, git_root.display(),
COMPILING, git_root.display(),
execs().with_stdout("hello world\n"));
})
-test!(override_git_dep {
- let p = project("foo");
- let root = p.root().clone();
- let p = p
- .file(".cargo/config", format!(r#"
- paths = ['{}/baz']
- "#, root.display()))
- .file("Cargo.toml", r#"
- [package]
-
- name = "foo"
- version = "0.5.0"
- authors = ["wycats@example.com"]
-
- [dependencies.bar]
- path = "bar"
- "#)
- .file("src/main.rs", "extern crate bar; fn main() {}")
- .file("bar/Cargo.toml", r#"
- [package]
-
- name = "bar"
- version = "0.5.0"
- authors = ["wycats@example.com"]
-
- [dependencies.baz]
- git = 'git://example.com/path/to/nowhere'
- "#)
- .file("bar/src/lib.rs", "extern crate baz;")
- .file("baz/Cargo.toml", r#"
- [package]
-
- name = "baz"
- version = "0.5.0"
- authors = ["wycats@example.com"]
- "#)
- .file("baz/src/lib.rs", "");
-
- assert_that(p.cargo_process("cargo-build"),
- execs()
- .with_stdout(format!("{compiling} baz v0.5.0 (file:{dir}{sep}baz)\n\
- {compiling} bar v0.5.0 (file:{dir})\n\
- {compiling} foo v0.5.0 (file:{dir})\n",
- compiling = COMPILING, dir = root.display(),
- sep = path::SEP))
- .with_stderr(""));
-
- assert_that(&p.bin("foo"), existing_file());
-
- assert_that(
- cargo::util::process(p.bin("foo")),
- execs().with_stdout(""));
-})
-
test!(cargo_compile_git_dep_branch {
let project = project("foo");
let git_project = git_repo("dep1", |project| {
assert_that(project.cargo_process("cargo-build"),
execs()
.with_stdout(format!("{} git repository `file:{}`\n\
- {} dep1 v0.5.0 (file:{}#ref=branchy)\n\
+ {} dep1 v0.5.0 (file:{}?ref=branchy#[..])\n\
{} foo v0.5.0 (file:{})\n",
UPDATING, git_root.display(),
COMPILING, git_root.display(),
assert_that(project.cargo_process("cargo-build"),
execs()
.with_stdout(format!("{} git repository `file:{}`\n\
- {} dep1 v0.5.0 (file:{}#ref=v0.1.0)\n\
+ {} dep1 v0.5.0 (file:{}?ref=v0.1.0#[..])\n\
{} foo v0.5.0 (file:{})\n",
UPDATING, git_root.display(),
COMPILING, git_root.display(),
// First time around we should compile both foo and bar
assert_that(p.cargo_process("cargo-build"),
execs().with_stdout(format!("{} git repository `file:{}`\n\
- {} bar v0.5.0 (file:{})\n\
+ {} bar v0.5.0 (file:{}#[..])\n\
{} foo v0.5.0 (file:{})\n",
UPDATING, git_project.root().display(),
COMPILING, git_project.root().display(),
// Don't recompile the second time
assert_that(p.process(cargo_dir().join("cargo-build")),
- execs().with_stdout(format!("{} bar v0.5.0 (file:{})\n\
+ execs().with_stdout(format!("{} bar v0.5.0 (file:{}#[..])\n\
{} foo v0.5.0 (file:{})\n",
FRESH, git_project.root().display(),
FRESH, p.root().display())));
"#).assert();
assert_that(p.process(cargo_dir().join("cargo-build")),
- execs().with_stdout(format!("{} bar v0.5.0 (file:{})\n\
+ execs().with_stdout(format!("{} bar v0.5.0 (file:{}#[..])\n\
{} foo v0.5.0 (file:{})\n",
FRESH, git_project.root().display(),
FRESH, p.root().display())));
assert_that(p.process(cargo_dir().join("cargo-build")).arg("-u"),
execs().with_stdout(format!("{} git repository `file:{}`\n\
- {} bar v0.5.0 (file:{})\n\
+ {} bar v0.5.0 (file:{}#[..])\n\
{} foo v0.5.0 (file:{})\n",
UPDATING, git_project.root().display(),
FRESH, git_project.root().display(),
assert_that(p.process(cargo_dir().join("cargo-build")).arg("-u"),
execs().with_stdout(format!("{} git repository `file:{}`\n\
- {} bar v0.5.0 (file:{})\n\
+ {} bar v0.5.0 (file:{}#[..])\n\
{} foo v0.5.0 (file:{})\n",
UPDATING, git_project.root().display(),
COMPILING, git_project.root().display(),
mod test_cargo_compile_path_deps;
mod test_cargo_test;
mod test_shell;
-// mod test_cargo_cross_compile;
+mod test_cargo_cross_compile;
mod test_cargo_run;
mod test_cargo_version;
mod test_cargo_new;